---
title: "Brushing DHIS2 Events"
output:
  flexdashboard::flex_dashboard:
    orientation: columns
    source_code: embed
  html_document:
    df_print: paged
---

```{r setup_1, include=TRUE, echo = FALSE, warning=FALSE, message=FALSE}
knitr::opts_chunk$set(echo=FALSE, warning=FALSE, paged.print=FALSE)
options(encoding = "UTF-8")
# IMPORTANT MANUAL STEPS BEFORE RUNNING
# Credentials and global variables.
# (include https:// and the trailing slash of the DHIS2 URL, in quotes)
# NOTE(review): hard-coded credentials are fine for the public play server,
# but for a real instance read them via askpass/keyring (both loaded below).
baseurl<-"https://play.dhis2.org/2.32.2/"
username<-"admin"
password<-"district"
##______Now load packages and set up_____##
## First load required packages.
## Fixes: removed the duplicated "knitr" entry, and added "dplyr" and "sf",
## which later chunks rely on (dplyr::inner_join / %>% and library(sf))
## but which were missing from the install list.
required_packages <- c("ggplot2","stringr","httr","jsonlite","assertthat","readr","tibble","plyr","tidyr","dygraphs","xts","lubridate","knitr","askpass","DT","keyring","devtools","crosstalk","plotly","leaflet","parcoords","dplyr","sf")
# Return TRUE for each element of `mypkg` that names a locally installed
# package. installed.packages() uses package names as its row names, so a
# membership test against rownames() is equivalent to checking column 1
# ("Package") as the original did.
is_installed <- function(mypkg) mypkg %in% rownames(installed.packages())
# For each required package: install it from CRAN if it is missing, then
# attach it with library() (character.only because the name is a string).
# Fix: install from the canonical https CRAN mirror instead of the old
# http-only CMU mirror, so package downloads are not fetched over plain http.
load_or_install<-function(required_packages) {
  for(package_name in required_packages)  {
    if(!is_installed(package_name))     {
       install.packages(package_name, repos = "https://cloud.r-project.org")  }
    library(package_name,character.only=TRUE,quietly=TRUE,verbose=FALSE)
  }
}
# Install/attach every package in required_packages (side effect: attaches).
load_or_install(required_packages)
# Log in to DHIS2 by requesting api/me with basic auth.
# NOTE(review): `password` is read from the global environment set earlier in
# this chunk, not passed as an argument — confirm this is intentional.
# Fixes: return an explicit logical success flag (HTTP 200) instead of the
# raw response object (the original return value was unused), and un-indent
# the top-level call, which was misleadingly indented as if it were part of
# the function body.
loginDHIS2<-function(baseurl,username) {
  url<-paste0(baseurl,"api/me")
  r<-GET(url,authenticate(username, password))
  status_code(r) == 200L
}
loginDHIS2(baseurl,username)
```



```{r get data, message=FALSE, warning=FALSE, paged.print=FALSE}
# Build the analytics event-query URL for program WSGAb5XwJ3Y
# (presumably the WHO RMNCH tracker program on the play server — confirm).
url<-paste0(baseurl,
            "api/29/analytics/events/query/WSGAb5XwJ3Y.csv?", # CSV event query for the program
            "dimension=pe:LAST_3_MONTHS", # events from the last 3 months (old comment wrongly said "last month")
            "&stage=edqlbukwRfQ&dimension=ou:ImspTQPwCqd", # program stage and root org unit
          
            # data element dimensions plus display/table-layout properties
            "&dimension=tYPIZKEVh23&dimension=vANAXwtLwcT&dimension=w75KJ2mc4zz&", 
            "displayProperty=NAME&tableLayout=true&columns=pe;ou;tYPIZKEVh23;vANAXwtLwcT;w75KJ2mc4zz&rows=pe;ou;tYPIZKEVh23;vANAXwtLwcT;w75KJ2mc4zz") 
# GET the CSV and parse the response body into a data frame
output<-httr::GET(url)
report1 <-read_csv(content(output))
# Pretty-print a data frame as an interactive DT table with export buttons,
# hiding any raw coordinate columns (longitude/latitude/point). Defined as a
# function so later chunks can reuse it on crosstalk SharedData objects.
#
# Fix: DataTables' columnDefs$targets uses 0-based column indices, and with
# rownames = FALSE there is no extra row-name column at index 0 — so the
# 1-based indices returned by grep() must be shifted down by one. The
# original hid the column *after* each coordinate column.
#
# NOTE(review): escape = FALSE renders cell HTML (needed for the record-link
# column added later); only safe because the data comes from the trusted
# DHIS2 API, not user input.
makeDT <-function(x){
              # 0-based indices of columns to hide (empty vector when none match)
              hidden_targets <- c(grep("longitude", colnames(x)),
                                  grep("latitude", colnames(x)),
                                  grep("point", colnames(x))) - 1L
              DT::datatable(x,
              filter = 'bottom',
              caption = "Event report example",
              extensions = 'Buttons', 
              escape = FALSE,
              rownames = FALSE,
              width= "100%",
              class = "compact",
              options = list(
                pageLength = 5,
                dom = 'Blfrtip', 
                buttons = list(I("colvis"),
                c('copy', 'csv', 'excel', 'pdf')),
                columnDefs = list(list(visible=FALSE, 
                                     targets=hidden_targets))))
}
```


```{r get enrollment IDs, warning=FALSE, message=FALSE}
# Start date for the event query.
# NOTE(review): despite the "last 3 months" wording elsewhere, this rounds to
# the start of the month FOUR months back — presumably a one-month buffer so
# month rounding never excludes events; confirm that is intentional.
fourmonthsago <-round_date(ymd(Sys.Date()), unit = "month") %m-% months(4) 
#We will use the EVENTS resource. 
#This allows us to query directly for all events in a program and stage, 
#but not based on data values. 
#For simplicity we'll just get first 1000 records.
url<-paste0(baseurl,"api/29/events.json?fields=enrollment,event,trackedEntityInstance",
                    "&program=WSGAb5XwJ3Y&startDate=",fourmonthsago,
                    "&stage=edqlbukwRfQ",
                    "&pageSize=",1000)
enrollment_IDs <-fromJSON(content(GET(url),"text"))
# Join the enrollment IDs onto the event report (matching the report's
# "Event" column to the API's "event" field), normalise the event timestamp
# to a Date, and add a display link column.
events_with_enrollments<- report1 %>%
  dplyr::inner_join(enrollment_IDs$events, by = c("Event" = "event")) %>%
    mutate("Event date"=as.Date(lubridate::ymd_hms(`Event date`, tz = "UTC"))) %>%
  mutate("Record Link"=paste0("Record Link")) %>%
  # NOTE(review): positional indices (9, 3) are fragile — they break if the
  # analytics API adds or reorders columns; confirm against the live layout.
  select(c(9, 3, `Organisation unit`:`Record Link`))%>%
  select(-c("trackedEntityInstance", `Organisation unit`)) %>%
  arrange(desc(`Event date`))
```


```{r basic map, warning=FALSE}
library(leaflet)
library(sf)
library(dplyr)
# Fetch name + geometry for every org unit that actually has a geometry
url<-paste0(baseurl,"api/organisationUnits.csv?paging=false&fields=name,geometry&filter=geometry:!null")
output <-read_csv(content(GET(url)))
# Join org-unit geometry onto the events by org-unit name, then split the
# geometry string on spaces into point/longitude/latitude tokens.
# NOTE(review): this assumes the CSV serialises a point geometry as three
# space-separated tokens with longitude before latitude — confirm against the
# instance, since a GeoJSON-style geometry would need different parsing.
mapobj<-events_with_enrollments %>%
dplyr::rename("name"=`Organisation unit name`) %>%
dplyr::inner_join(output) %>%
separate(geometry, c("point", "longitude","latitude"), sep = " ", extra = "drop") %>%
select(-point)
# parse_number() strips any non-numeric characters (brackets, commas) left
# over from the geometry string before converting to numeric
mapobj$latitude<-parse_number(mapobj$latitude)
mapobj$longitude<-parse_number(mapobj$longitude)
```

```{r linked map, warning=FALSE}
# Wrap the map data in a crosstalk SharedData object so the plotly chart,
# the DT table, and the leaflet map are all brushed/filtered together.
# The group key ("grp1") is what links widgets across chunks.
sd <- SharedData$new(mapobj, group="grp1")
# Scatter of days enrolled vs haemoglobin value, coloured by org unit.
# Selections made here propagate to every other "grp1" widget.
# `p` is a global rendered later on the "With Map" page.
p<-plot_ly(sd, x= ~`WHO RMNCH Days enrolled`, 
          y= ~`WHOMCH Hemoglobin value`,
          color = ~name,
          size = 3,
          showlegend=FALSE) %>% 
  add_markers(alpha = 0.5) %>%
  highlight("plotly_selected") %>%
  layout(height=200)
# Reuse the custom datatable helper on the shared data; `dt` is a global
# rendered later next to `p`.
dt<-makeDT(sd)
```


With Map {data-icon="ion-stats-bars"}
=====================================  

Column {data-width=300}
-------------------------------------

### Map
```{r interactive map}
# Leaflet map of the shared ("grp1") data: OSM tiles plus one marker per
# org unit, with the org-unit name as the popup. Built stepwise, assigned to
# `map`, then printed so flexdashboard renders the widget.
map <- leaflet(sd, width = "100%", height = 300)
map <- addTiles(map)
map <- addMarkers(map, lat = ~latitude, lng = ~longitude, popup = ~name)
map
```


Column {data-width=700}  
-------------------------------------
    
### Chart and table

```{r other interactive, fig.height=6}
# Stack the plotly chart and the linked table in one bootstrap column; both
# belong to crosstalk group "grp1", so brushing the chart filters the table.
bscols(list(
  p,
dt
))
```

Parallel Coordinates {data-icon="ion-stats-bars"}
=====================================  

Column {data-width=500} 
-------------------------------------

### Interactive chart and table
```{r setup_new, include=TRUE, echo = FALSE, warning=FALSE, message=FALSE,fig.height=8}
knitr::opts_chunk$set(echo=FALSE, warning=FALSE, paged.print=FALSE)
options(encoding = "UTF-8")
# IMPORTANT MANUAL STEPS BEFORE RUNNING
# Credentials and global variables.
# (include https:// and the trailing slash of the DHIS2 URL, in quotes)

# Switch over to a 2.29 play instance for this page.

baseurl<-"https://play.dhis2.org/2.29/"
username<-"admin"
password<-"district"
##______Now load packages and set up_____##
## First load required packages.
## Fixes: removed the duplicated "knitr" entry and added "dplyr", which the
## pipeline below uses (%>%, arrange, select) but which was missing.
required_packages <- c("ggplot2","stringr","httr","jsonlite","assertthat","readr","tibble","plyr","tidyr","dygraphs","xts","lubridate","knitr","askpass","DT","keyring","devtools","crosstalk","plotly","parcoords","dplyr")
# Return TRUE if `mypkg` is among the locally installed packages.
is_installed <- function(mypkg) is.element(mypkg, installed.packages()[,1])
# Install each missing package from CRAN, then attach it.
# Fix: canonical https CRAN mirror instead of the old http-only CMU mirror.
load_or_install<-function(required_packages) {
  for(package_name in required_packages)  {
    if(!is_installed(package_name))     {
       install.packages(package_name, repos = "https://cloud.r-project.org")  }
    library(package_name,character.only=TRUE,quietly=TRUE,verbose=FALSE)
  }
}

# Run the installer/loader
load_or_install(required_packages)
# Log in to DHIS2 (basic auth against api/me). `password` comes from the
# global set above. Fix: return an explicit logical success flag (HTTP 200)
# instead of the raw response object.
loginDHIS2<-function(baseurl,username) {
  url<-paste0(baseurl,"api/me")
  r<-GET(url,authenticate(username, password))
  status_code(r) == 200L
}

loginDHIS2(baseurl,username)

# Analytics event query for program eBAyeGv0exc with fixed periods
# 201812;201811, root org unit, and table layout.
# NOTE(review): the hard-coded periods will go stale against a live
# instance — consider a relative period such as LAST_3_MONTHS.
url<-paste0(baseurl,"api/29/analytics/events/query/eBAyeGv0exc.csv?dimension=pe:201812;201811&dimension=ou:ImspTQPwCqd&dimension=x7PaHGvgWY2&dimension=vV9UWAZohSf&dimension=GieVkTxp4HH&dimension=qrur9Dvnyt5&dimension=b8hd33dWjR6&dimension=oZg33kd9taw&stage=Zj7UnCAulEk&displayProperty=NAME&tableLayout=true&columns=pe;ou;x7PaHGvgWY2;vV9UWAZohSf;GieVkTxp4HH;qrur9Dvnyt5;b8hd33dWjR6;oZg33kd9taw&rows=pe;ou;x7PaHGvgWY2;vV9UWAZohSf;GieVkTxp4HH;qrur9Dvnyt5;b8hd33dWjR6;oZg33kd9taw") 
# GET the CSV and parse the response body into a data frame
report1 <-read_csv(content(GET(url)))

colength<-length(colnames(report1))

# Keep only the last six columns (the data-value columns of the table
# layout), ordered by event date
r1 <- report1 %>%
  arrange(`Event date`) %>%
  select((colength-5):colength)


# The full result is large; keep just the first 1000 rows (the earliest
# event dates after the arrange above)
new_r1<-head(r1, 1000)

# Shared data for this page; group "grp2" links the parallel-coordinates
# chart with the table
sd <- crosstalk::SharedData$new(new_r1, group="grp2")

# Brushable parallel-coordinates widget; `pc` is rendered in a later chunk
pc <- parcoords(sd, 
          brushMode = '1d',
          alphaOnBrushed = 0.2,
          queue = TRUE,
          rate = 50,
          reorderable = TRUE,
          width='500')

# Linked table (reuses the makeDT helper defined on the first page)
dt<-makeDT(sd)

dt
```

Column {data-width=500} 
-------------------------------------

### Coordinate graph

```{r parcoords}

# Render the parallel-coordinates chart built in the setup_new chunk; it
# shares crosstalk group "grp2" with the table in the left column, so
# brushing an axis filters the table.
pc


```